Working through this Keras tutorial.


In [2]:
import numpy as np
np.random.seed(123)

In [3]:
from matplotlib import pyplot as plt
%matplotlib inline

In [4]:
from keras.models import Sequential

from keras.layers import Dense, Activation, Dropout, Flatten

from keras.layers import Convolution2D, MaxPooling2D

from keras.utils import np_utils

from keras.datasets import mnist


Using Theano backend.
Using gpu device 0: GeForce GTX 960M (CNMeM is disabled, cuDNN not available)

In [5]:
(X_train_unpro, Y_train_unpro), (X_test_unpro, Y_test_unpro) = mnist.load_data()

In [6]:
Y_train_unpro[1]


Out[6]:
0

In [7]:
print(X_train_unpro.shape)


(60000, 28, 28)

In [8]:
plt.imshow(X_train_unpro[1]);

[Output: a 28x28 plot of the second training image, a handwritten 0.]

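A small tweak, not from the tutorial, that I find makes the plot easier to read: render in greyscale and put the label in the title.

plt.imshow(X_train_unpro[1], cmap='gray')  # greyscale colormap instead of the default
plt.title('label: {}'.format(Y_train_unpro[1]))  # should show 0, per Out[6] above
plt.axis('off');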
In [9]:
train_examples, width, height = X_train_unpro.shape
depth = 1  # greyscale images, so a single channel
test_examples, _, _ = X_test_unpro.shape

In [10]:
X_train_reshaped = X_train_unpro.reshape(train_examples, depth, width, height)
X_test_reshaped = X_test_unpro.reshape(test_examples, depth, width, height)

In [11]:
print(X_train_reshaped.shape, X_test_reshaped.shape)


(60000, 1, 28, 28) (10000, 1, 28, 28)
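As an aside, np.expand_dims does the same channel-axis insertion without hand-writing the reshape; a quick sanity check of my own (not from the tutorial):

X_train_alt = np.expand_dims(X_train_unpro, axis=1)  # add the depth axis at position 1
print(np.array_equal(X_train_alt, X_train_reshaped))  # should print True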

In [12]:
X_train = X_train_reshaped.astype('float32')
X_test = X_test_reshaped.astype('float32')

X_train /= 255
X_test /= 255
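A quick check, just to convince myself the scaling worked; pixel values should now lie in [0, 1]:

print(X_train.dtype, X_train.min(), X_train.max())  # expect: float32 0.0 1.0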

In [13]:
Y_train = np_utils.to_categorical(Y_train_unpro, 10)
Y_test = np_utils.to_categorical(Y_test_unpro, 10)

In [14]:
print(Y_train.shape, Y_test.shape)


(60000, 10) (10000, 10)
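to_categorical is just one-hot encoding. The same thing in plain numpy, as my own sanity check:

Y_train_alt = np.eye(10)[Y_train_unpro]  # row i of the 10x10 identity is the one-hot vector for label i
print(np.array_equal(Y_train_alt, Y_train))  # should print True
print(Y_train[1])  # label 0 -> [ 1.  0.  0.  0.  0.  0.  0.  0.  0.  0.]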

In [27]:
model = Sequential()

In [28]:
model.add(Convolution2D(32, 3, 3, activation='relu', input_shape=(1,28,28), dim_ordering='th'))

The output shape was coming up as

(None, -1, 26, 32)

and not the expected result below. I found a GitHub issue that suggested adding the dim_ordering argument with the value 'th'. Not sure what that means right now. Later!


In [30]:
print(model.output_shape)


(None, 32, 26, 26)
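In hindsight, 'th' appears to mean Theano-style channels-first ordering, i.e. (depth, width, height), matching the reshape above; without it Keras was presumably parsing the input as channels-last, hence the odd -1. The (32, 26, 26) itself checks out: a 3x3 kernel with no padding ('valid', the default border mode) shaves a pixel off each edge, giving one 26x26 feature map per filter. A back-of-the-envelope check:

kernel = 3
out_size = 28 - kernel + 1  # 'valid' convolution: output = input - kernel + 1
print(out_size)  # 26, so the full shape is (None, 32, 26, 26)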

In [31]:
model.add(Convolution2D(32, 3, 3, activation='relu'))

In [32]:
model.add(MaxPooling2D(pool_size=(2,2)))
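Same arithmetic again: the second 3x3 conv takes 26x26 down to 24x24, and 2x2 max pooling halves each spatial dimension, so the output here should be (None, 32, 12, 12). My own scratch calculation:

conv2_out = 26 - 3 + 1  # second 'valid' conv: 26 -> 24
pooled = conv2_out // 2  # 2x2 pooling: 24 -> 12
print((32, pooled, pooled))  # (32, 12, 12)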

In [33]:
model.add(Dropout(0.25))

In [34]:
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(10, activation='softmax'))
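For my own bookkeeping, the parameter counts per layer (weights plus biases) work out as below; model.summary() should agree:

conv1 = (3 * 3 * 1 + 1) * 32  # 320
conv2 = (3 * 3 * 32 + 1) * 32  # 9248
dense1 = (32 * 12 * 12 + 1) * 128  # Flatten yields 32*12*12 = 4608 inputs -> 589952
dense2 = (128 + 1) * 10  # 1290
print(conv1 + conv2 + dense1 + dense2)  # 600810 trainable parameters in total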

In [35]:
model.compile(loss='categorical_crossentropy',
              optimizer='adam',
              metrics=['accuracy'])
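The loss being minimised is categorical cross-entropy, -sum(y_true * log(y_pred)) per sample, averaged over the batch. A toy numpy illustration of my own:

y_true = np.array([0., 0., 1.])  # one-hot target
y_pred = np.array([0.1, 0.2, 0.7])  # a softmax output
print(-np.sum(y_true * np.log(y_pred)))  # ~0.357 for this sample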

In [36]:
model.fit(X_train, Y_train, 
          batch_size=32, nb_epoch=10, verbose=1)


Epoch 1/10
60000/60000 [==============================] - 83s - loss: 0.2759 - acc: 0.9153    
Epoch 2/10
60000/60000 [==============================] - 83s - loss: 0.1137 - acc: 0.9669    
Epoch 3/10
60000/60000 [==============================] - 83s - loss: 0.0859 - acc: 0.9742    
Epoch 4/10
60000/60000 [==============================] - 83s - loss: 0.0698 - acc: 0.9789    
Epoch 5/10
60000/60000 [==============================] - 83s - loss: 0.0591 - acc: 0.9821    
Epoch 6/10
60000/60000 [==============================] - 83s - loss: 0.0539 - acc: 0.9835    
Epoch 7/10
60000/60000 [==============================] - 83s - loss: 0.0470 - acc: 0.9854    
Epoch 8/10
60000/60000 [==============================] - 83s - loss: 0.0433 - acc: 0.9870    
Epoch 9/10
60000/60000 [==============================] - 83s - loss: 0.0400 - acc: 0.9875    
Epoch 10/10
60000/60000 [==============================] - 83s - loss: 0.0359 - acc: 0.9888    
Out[36]:
<keras.callbacks.History at 0x1d020898>

In [37]:
score = model.evaluate(X_test, Y_test, verbose=0)

In [38]:
score


Out[38]:
[0.035090916357164631, 0.98939999999999995]
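model.evaluate returns the loss followed by one entry per metric passed to compile, so this is [test loss, test accuracy]:

print('Test loss: {:.4f}'.format(score[0]))  # 0.0351
print('Test accuracy: {:.4f}'.format(score[1]))  # 0.9894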
